In [0]:
# Mount Google Drive so the PlantVillage dataset is reachable from Colab.
from google.colab import drive
drive.mount('/content/gdrive')
Go to this URL in a browser: https://accounts.google.com/o/oauth2/auth?client_id=947318989803-6bn6qk8qdgf4n4g3pfee6491hc0brc4i.apps.googleusercontent.com&redirect_uri=urn%3Aietf%3Awg%3Aoauth%3A2.0%3Aoob&scope=email%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdocs.test%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fdrive.photos.readonly%20https%3A%2F%2Fwww.googleapis.com%2Fauth%2Fpeopleapi.readonly&response_type=code

Enter your authorization code:
··········
Mounted at /content/gdrive
In [0]:
import pandas as pd
import numpy as np
import os
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from mpl_toolkits.axes_grid1 import ImageGrid
import seaborn as sns
import re
from keras.preprocessing import image
Using TensorFlow backend.
In [0]:
def data_information(data_dir):
    """Scan a PlantVillage-style directory and summarise its classes.

    Every subdirectory of ``data_dir`` is one class, named
    ``"<species>___<disease>"``.  The ``Soybean___healthy`` class is
    excluded from the analysis.

    Parameters
    ----------
    data_dir : str
        Path of the directory holding one subdirectory per class.

    Returns
    -------
    tuple of (set, set, set)
        ``(class_names, species, disease)``.
    """
    class_names = set(os.listdir(data_dir))
    # discard() (unlike remove()) does not raise if the folder is absent.
    class_names.discard("Soybean___healthy")

    # One pass extracts both the species and the disease from each
    # "<species>___<disease>" folder name.
    species = set()
    disease = set()
    for name in class_names:
        spec, _, dis = name.partition("___")
        species.add(spec)
        disease.add(dis)

    return class_names, species, disease
In [0]:
# Summarise the training set: class folders, plant species, and diseases.
data_dir = "PlantVillage/train"
class_names, species, disease = data_information(data_dir)
In [0]:
# Dataset cardinalities (output text matches the original prints).
print(f"no of classes are: {len(class_names)}")
print(f"no of species are: {len(species)}")
print(f"no of diseases are: {len(disease)}")
no of classes are: 37
no of species are: 13
no of diseases are: 21
In [0]:
class_names
Out[0]:
{'Apple___Apple_scab',
 'Apple___Black_rot',
 'Apple___Cedar_apple_rust',
 'Apple___healthy',
 'Blueberry___healthy',
 'Cherry_(including_sour)___Powdery_mildew',
 'Cherry_(including_sour)___healthy',
 'Corn_(maize)___Cercospora_leaf_spot Gray_leaf_spot',
 'Corn_(maize)___Common_rust_',
 'Corn_(maize)___Northern_Leaf_Blight',
 'Corn_(maize)___healthy',
 'Grape___Black_rot',
 'Grape___Esca_(Black_Measles)',
 'Grape___Leaf_blight_(Isariopsis_Leaf_Spot)',
 'Grape___healthy',
 'Orange___Haunglongbing_(Citrus_greening)',
 'Peach___Bacterial_spot',
 'Peach___healthy',
 'Pepper,_bell___Bacterial_spot',
 'Pepper,_bell___healthy',
 'Potato___Early_blight',
 'Potato___Late_blight',
 'Potato___healthy',
 'Raspberry___healthy',
 'Squash___Powdery_mildew',
 'Strawberry___Leaf_scorch',
 'Strawberry___healthy',
 'Tomato___Bacterial_spot',
 'Tomato___Early_blight',
 'Tomato___Late_blight',
 'Tomato___Leaf_Mold',
 'Tomato___Septoria_leaf_spot',
 'Tomato___Spider_mites Two-spotted_spider_mite',
 'Tomato___Target_Spot',
 'Tomato___Tomato_Yellow_Leaf_Curl_Virus',
 'Tomato___Tomato_mosaic_virus',
 'Tomato___healthy'}
In [0]:
species
Out[0]:
{'Apple',
 'Blueberry',
 'Cherry_(including_sour)',
 'Corn_(maize)',
 'Grape',
 'Orange',
 'Peach',
 'Pepper,_bell',
 'Potato',
 'Raspberry',
 'Squash',
 'Strawberry',
 'Tomato'}
In [0]:
disease
Out[0]:
{'Apple_scab',
 'Bacterial_spot',
 'Black_rot',
 'Cedar_apple_rust',
 'Cercospora_leaf_spot Gray_leaf_spot',
 'Common_rust_',
 'Early_blight',
 'Esca_(Black_Measles)',
 'Haunglongbing_(Citrus_greening)',
 'Late_blight',
 'Leaf_Mold',
 'Leaf_blight_(Isariopsis_Leaf_Spot)',
 'Leaf_scorch',
 'Northern_Leaf_Blight',
 'Powdery_mildew',
 'Septoria_leaf_spot',
 'Spider_mites Two-spotted_spider_mite',
 'Target_Spot',
 'Tomato_Yellow_Leaf_Curl_Virus',
 'Tomato_mosaic_virus',
 'healthy'}
In [0]:
# Count train/test images per class by listing each class directory once.
image_counts = {}
test_count = []
for class_name in class_names:
    train_dir = data_dir + "/" + class_name
    test_dir = "PlantVillage/test" + "/" + class_name
    image_counts[class_name] = len(os.listdir(train_dir))
    test_count.append(len(os.listdir(test_dir)))

# Single summary frame: one row per class, train and test counts side by side.
count = pd.DataFrame.from_dict(image_counts, orient="index")
count.columns = ["train_count"]
count["test_count"] = test_count
count
Out[0]:
train_count test_count
Grape___Leaf_blight_(Isariopsis_Leaf_Spot) 861 215
Tomato___Early_blight 800 200
Peach___Bacterial_spot 1838 459
Tomato___Late_blight 1527 382
Potato___healthy 121 31
Tomato___Tomato_Yellow_Leaf_Curl_Virus 4286 1071
Corn_(maize)___Cercospora_leaf_spot Gray_leaf_spot 410 103
Tomato___Leaf_Mold 761 191
Pepper,_bell___Bacterial_spot 797 200
Tomato___Bacterial_spot 1702 425
Grape___Black_rot 944 236
Raspberry___healthy 297 74
Strawberry___Leaf_scorch 887 222
Tomato___Septoria_leaf_spot 1417 354
Corn_(maize)___Northern_Leaf_Blight 788 197
Tomato___healthy 1273 318
Tomato___Spider_mites Two-spotted_spider_mite 1341 335
Orange___Haunglongbing_(Citrus_greening) 4405 1102
Cherry_(including_sour)___Powdery_mildew 842 210
Potato___Late_blight 800 200
Grape___healthy 339 84
Potato___Early_blight 800 200
Apple___Black_rot 496 125
Apple___Apple_scab 504 126
Corn_(maize)___healthy 929 233
Peach___healthy 288 72
Cherry_(including_sour)___healthy 684 170
Strawberry___healthy 364 92
Apple___healthy 1316 329
Apple___Cedar_apple_rust 220 55
Pepper,_bell___healthy 1183 295
Blueberry___healthy 1202 300
Squash___Powdery_mildew 1468 367
Tomato___Tomato_mosaic_virus 299 74
Corn_(maize)___Common_rust_ 953 239
Tomato___Target_Spot 1123 281
Grape___Esca_(Black_Measles) 1107 276
In [0]:
# Bar chart of per-class train/test image counts.
ax = count.plot.bar(figsize=(20, 10))
ax.set_xlabel("class_name")
ax.set_ylabel("count")
Out[0]:
Text(0, 0.5, 'count')
In [0]:
# Disease x species presence table: 1 where "<species>___<disease>" is a class.
comb = [
    (dis, spec, 1 if spec + "___" + dis in class_names else 0)
    for dis in disease
    for spec in species
]
data = pd.DataFrame(comb, columns=["Disease", "Specie", "class"])
data_matrix = data.pivot_table(values="class", index="Disease", columns="Specie")
data_matrix
Out[0]:
Specie Apple Blueberry Cherry_(including_sour) Corn_(maize) Grape Orange Peach Pepper,_bell Potato Raspberry Squash Strawberry Tomato
Disease
Apple_scab 1 0 0 0 0 0 0 0 0 0 0 0 0
Bacterial_spot 0 0 0 0 0 0 1 1 0 0 0 0 1
Black_rot 1 0 0 0 1 0 0 0 0 0 0 0 0
Cedar_apple_rust 1 0 0 0 0 0 0 0 0 0 0 0 0
Cercospora_leaf_spot Gray_leaf_spot 0 0 0 1 0 0 0 0 0 0 0 0 0
Common_rust_ 0 0 0 1 0 0 0 0 0 0 0 0 0
Early_blight 0 0 0 0 0 0 0 0 1 0 0 0 1
Esca_(Black_Measles) 0 0 0 0 1 0 0 0 0 0 0 0 0
Haunglongbing_(Citrus_greening) 0 0 0 0 0 1 0 0 0 0 0 0 0
Late_blight 0 0 0 0 0 0 0 0 1 0 0 0 1
Leaf_Mold 0 0 0 0 0 0 0 0 0 0 0 0 1
Leaf_blight_(Isariopsis_Leaf_Spot) 0 0 0 0 1 0 0 0 0 0 0 0 0
Leaf_scorch 0 0 0 0 0 0 0 0 0 0 0 1 0
Northern_Leaf_Blight 0 0 0 1 0 0 0 0 0 0 0 0 0
Powdery_mildew 0 0 1 0 0 0 0 0 0 0 1 0 0
Septoria_leaf_spot 0 0 0 0 0 0 0 0 0 0 0 0 1
Spider_mites Two-spotted_spider_mite 0 0 0 0 0 0 0 0 0 0 0 0 1
Target_Spot 0 0 0 0 0 0 0 0 0 0 0 0 1
Tomato_Yellow_Leaf_Curl_Virus 0 0 0 0 0 0 0 0 0 0 0 0 1
Tomato_mosaic_virus 0 0 0 0 0 0 0 0 0 0 0 0 1
healthy 1 1 1 1 1 0 1 1 1 1 0 1 1
In [0]:
# Collect (filepath, class) records for every training image.
records = []
for class_name in class_names:
    class_dir = "PlantVillage/train" + "/" + class_name
    for fname in os.listdir(class_dir):
        records.append([class_dir + "/" + fname, class_name])

data_train = pd.DataFrame(records, columns=['file', 'class_name'])
data_train = data_train.drop_duplicates()
print(len(data_train))
data_train.head()
39372
Out[0]:
file class_name
0 PlantVillage/train/Grape___Leaf_blight_(Isario... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
1 PlantVillage/train/Grape___Leaf_blight_(Isario... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
2 PlantVillage/train/Grape___Leaf_blight_(Isario... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
3 PlantVillage/train/Grape___Leaf_blight_(Isario... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
4 PlantVillage/train/Grape___Leaf_blight_(Isario... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
In [0]:
# Collect (filepath, class) records for every test image.
# (`data_test` stays module-level: a later cell reads its length.)
data_test = []
for class_name in class_names:
    class_dir = "PlantVillage/test" + "/" + class_name
    for fname in os.listdir(class_dir):
        data_test.append([class_dir + "/" + fname, class_name])

test_data = pd.DataFrame(data_test, columns=['file', 'class_name'])
test_data = test_data.drop_duplicates()
print(len(test_data))
test_data.head()
9843
Out[0]:
file class_name
0 PlantVillage/test/Grape___Leaf_blight_(Isariop... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
1 PlantVillage/test/Grape___Leaf_blight_(Isariop... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
2 PlantVillage/test/Grape___Leaf_blight_(Isariop... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
3 PlantVillage/test/Grape___Leaf_blight_(Isariop... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
4 PlantVillage/test/Grape___Leaf_blight_(Isariop... Grape___Leaf_blight_(Isariopsis_Leaf_Spot)
In [0]:
# Report sizes of the frames actually used downstream.  The original printed
# len(data_test) — the raw pre-dedup list — while the training count came from
# the de-duplicated data_train; use the de-duplicated test_data for both to
# keep the two numbers comparable.
print("no of training images are:", len(data_train))
print("no of testing images are:", len(test_data))
no of training images are: 39372
no of testing images are: 9843
In [0]:
import cv2
import numpy as np
from matplotlib import pyplot as plt
from google.colab.patches import cv2_imshow

# Show one sample training image, then plot its per-channel (BGR)
# brightness histogram.
img = cv2.imread(data_train["file"][10], -1)
cv2_imshow(img)

for channel, col in enumerate(('b', 'g', 'r')):
    hist = cv2.calcHist([img], [channel], None, [256], [0, 256])
    plt.plot(hist, color=col)
    plt.xlim([0, 256])
plt.ylabel(' Number of pixels in the image')
plt.xlabel(' Brightness value')

plt.show()
#plt.title('Histogram for color scale picture')
In [0]:
def read_img(filepath, size):
    """Load the image at ``filepath`` resized to ``size`` and return it
    as a float array (via keras' image utilities)."""
    loaded = image.load_img(filepath, target_size=size)
    return image.img_to_array(loaded)

def format_name(s):
    """Collapse each run of underscores in a class name into one space.

    E.g. ``"Apple___Apple_scab"`` -> ``"Apple Apple scab"``.
    """
    # Raw string for the regex so the pattern is explicit and lint-clean.
    return re.sub(r'_+', ' ', s)
In [0]:
# Show a strip of 10 sample images per class, labelling each row at its
# right edge with the (de-underscored) class name.
num_classes = len(class_names)
fig = plt.figure(1, figsize=(10, 40))
grid = ImageGrid(fig, 111, nrows_ncols=(num_classes, 10), axes_pad=0.05)

cell = 0
for label, class_name in enumerate(class_names):
    sample_files = data_train[data_train['class_name'] == class_name]['file'].values[:10]
    for filepath in sample_files:
        axis = grid[cell]
        axis.imshow(read_img(filepath, (256, 256)) / 255.)
        axis.axis('off')
        # Last image in the row carries the class label.
        if cell % 10 == 9:
            axis.text(260, 112, format_name(class_name), verticalalignment='center')
        cell += 1

plt.show();
In [0]:
from sklearn.model_selection import train_test_split
# 80/20 train/validation split of the training files, seeded for reproducibility.
train_files, valid_files, train_target, valid_target = train_test_split(data_train["file"], data_train["class_name"], test_size=0.2, random_state=42)
In [0]:
# One-hot encode all targets against a single fixed class ordering.
# Calling pd.get_dummies separately on each split (as before) silently
# misaligns columns if any class happens to be absent from one split;
# pinning the categories guarantees identical column order everywhere.
class_order = sorted(class_names)
train = np.array(train_files)
valid = np.array(valid_files)
target_train = pd.get_dummies(pd.Categorical(train_target, categories=class_order)).values
target_valid = pd.get_dummies(pd.Categorical(valid_target, categories=class_order)).values
test = np.array(test_data["file"])
target_test = pd.get_dummies(pd.Categorical(test_data["class_name"], categories=class_order)).values
In [0]:
print("final training data size is:",train.shape[0])
print("final validation data size is:",valid.shape[0])
print("final testing data size is:",test.shape[0])
final training data size is: 31497
final validation data size is: 7875
final testing data size is: 9843
In [0]:
from keras.preprocessing import image
from tqdm import tqdm

def path_to_tensor(img_path, size=(64, 64)):
    """Load one image file as a 4D tensor of shape (1, height, width, 3).

    Parameters
    ----------
    img_path : str
        Path to the image file.
    size : tuple of (int, int), optional
        Target (height, width).  Defaults to (64, 64), matching the model's
        input layer.  (The original comments claimed 224x224, which was wrong.)
    """
    # Load as a resized PIL image.
    img = image.load_img(img_path, target_size=size)
    # 3D array (height, width, 3).
    x = image.img_to_array(img)
    # Prepend a batch axis -> (1, height, width, 3).
    return np.expand_dims(x, axis=0)

def paths_to_tensor(img_paths):
    """Stack the per-image tensors for ``img_paths`` into one
    (N, 64, 64, 3) array, with a tqdm progress bar."""
    list_of_tensors = [path_to_tensor(img_path) for img_path in tqdm(img_paths)]
    return np.vstack(list_of_tensors)
In [0]:
from PIL import ImageFile
# Tolerate truncated/corrupt image files instead of raising during load.
ImageFile.LOAD_TRUNCATED_IMAGES = True

# pre-process the data for Keras: load images and scale pixels into [0, 1]
train_tensors = paths_to_tensor(train).astype('float32')/255
100%|██████████| 31497/31497 [00:40<00:00, 783.15it/s]
In [0]:
# Validation images, scaled into [0, 1] like the training tensors.
valid_tensors = paths_to_tensor(valid).astype('float32')/255
100%|██████████| 7875/7875 [00:10<00:00, 760.15it/s]
In [0]:
# Test images, scaled into [0, 1] like the training tensors.
test_tensors = paths_to_tensor(test).astype('float32')/255
100%|██████████| 9843/9843 [00:13<00:00, 735.54it/s]
In [0]:
from keras.layers import Conv2D, MaxPooling2D, GlobalAveragePooling2D
from keras.layers import Dropout, Flatten, Dense
from keras.models import Sequential

# From-scratch CNN: four conv/pool stages shrink 64x64x3 inputs to a
# 4x4x128 feature map, then a dense classifier head.
model = Sequential()

model.add(Conv2D(filters = 16, kernel_size = 2, padding = "same", activation = "relu", input_shape = (64,64,3)))
model.add(MaxPooling2D(pool_size = 2))
model.add(Conv2D(filters = 32, kernel_size = 2, padding = "same", activation = "relu"))
model.add(MaxPooling2D(pool_size = 2))
model.add(Conv2D(filters = 64, kernel_size = 2, padding = "same", activation = "relu"))
model.add(MaxPooling2D(pool_size = 2))
model.add(Conv2D(filters = 128, kernel_size = 2, padding = "same", activation = "relu"))
model.add(MaxPooling2D(pool_size = 2))
model.add(Flatten())
model.add(Dense(500, activation='relu'))
# Output width tied to the dataset via num_classes (defined earlier in the
# notebook) instead of the magic constant 37, so the head stays in sync if
# the class list changes.
model.add(Dense(num_classes, activation='softmax'))

model.summary()
WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/framework/op_def_library.py:263: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d_1 (Conv2D)            (None, 64, 64, 16)        208       
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 32, 32, 16)        0         
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 32, 32, 32)        2080      
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 16, 16, 32)        0         
_________________________________________________________________
conv2d_3 (Conv2D)            (None, 16, 16, 64)        8256      
_________________________________________________________________
max_pooling2d_3 (MaxPooling2 (None, 8, 8, 64)          0         
_________________________________________________________________
conv2d_4 (Conv2D)            (None, 8, 8, 128)         32896     
_________________________________________________________________
max_pooling2d_4 (MaxPooling2 (None, 4, 4, 128)         0         
_________________________________________________________________
flatten_1 (Flatten)          (None, 2048)              0         
_________________________________________________________________
dense_1 (Dense)              (None, 500)               1024500   
_________________________________________________________________
dense_2 (Dense)              (None, 37)                18537     
=================================================================
Total params: 1,086,477
Trainable params: 1,086,477
Non-trainable params: 0
_________________________________________________________________
In [0]:
# RMSprop + categorical cross-entropy for the one-hot multi-class targets.
model.compile(optimizer='rmsprop', loss='categorical_crossentropy', metrics=['accuracy'])
In [0]:
from keras.callbacks import ModelCheckpoint

# Checkpoint only the best weights (highest validation accuracy) across
# all epochs; the best-epoch file is reloaded later for evaluation.
epochs = 50
checkpointer = ModelCheckpoint(filepath='weights.final.from_scratch.hdf5', 
                               verbose=1, save_best_only=True, monitor='val_acc')

model.fit(train_tensors, target_train, 
          validation_data=(valid_tensors, target_valid),
          epochs=epochs, batch_size=20, callbacks=[checkpointer], verbose=1)
WARNING:tensorflow:From /usr/local/lib/python3.6/dist-packages/tensorflow/python/ops/math_ops.py:3066: to_int32 (from tensorflow.python.ops.math_ops) is deprecated and will be removed in a future version.
Instructions for updating:
Use tf.cast instead.
Train on 31497 samples, validate on 7875 samples
Epoch 1/50
31497/31497 [==============================] - 14s 457us/step - loss: 1.3231 - acc: 0.6119 - val_loss: 0.6654 - val_acc: 0.7884

Epoch 00001: val_acc improved from -inf to 0.78844, saving model to weights.final.from_scratch.hdf5
Epoch 2/50
31497/31497 [==============================] - 9s 290us/step - loss: 0.5288 - acc: 0.8321 - val_loss: 0.3857 - val_acc: 0.8756

Epoch 00002: val_acc improved from 0.78844 to 0.87556, saving model to weights.final.from_scratch.hdf5
Epoch 3/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.3587 - acc: 0.8854 - val_loss: 0.3788 - val_acc: 0.8839

Epoch 00003: val_acc improved from 0.87556 to 0.88394, saving model to weights.final.from_scratch.hdf5
Epoch 4/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.2803 - acc: 0.9115 - val_loss: 0.3272 - val_acc: 0.9021

Epoch 00004: val_acc improved from 0.88394 to 0.90210, saving model to weights.final.from_scratch.hdf5
Epoch 5/50
31497/31497 [==============================] - 9s 297us/step - loss: 0.2371 - acc: 0.9274 - val_loss: 0.3648 - val_acc: 0.8989

Epoch 00005: val_acc did not improve from 0.90210
Epoch 6/50
31497/31497 [==============================] - 10s 315us/step - loss: 0.2080 - acc: 0.9367 - val_loss: 0.3523 - val_acc: 0.9105

Epoch 00006: val_acc improved from 0.90210 to 0.91048, saving model to weights.final.from_scratch.hdf5
Epoch 7/50
31497/31497 [==============================] - 10s 312us/step - loss: 0.1918 - acc: 0.9438 - val_loss: 0.3192 - val_acc: 0.9131

Epoch 00007: val_acc improved from 0.91048 to 0.91314, saving model to weights.final.from_scratch.hdf5
Epoch 8/50
31497/31497 [==============================] - 9s 297us/step - loss: 0.1825 - acc: 0.9470 - val_loss: 0.4688 - val_acc: 0.8997

Epoch 00008: val_acc did not improve from 0.91314
Epoch 9/50
31497/31497 [==============================] - 10s 313us/step - loss: 0.1815 - acc: 0.9510 - val_loss: 0.3521 - val_acc: 0.9205

Epoch 00009: val_acc improved from 0.91314 to 0.92051, saving model to weights.final.from_scratch.hdf5
Epoch 10/50
31497/31497 [==============================] - 10s 303us/step - loss: 0.1760 - acc: 0.9534 - val_loss: 0.3728 - val_acc: 0.9135

Epoch 00010: val_acc did not improve from 0.92051
Epoch 11/50
31497/31497 [==============================] - 9s 297us/step - loss: 0.1737 - acc: 0.9540 - val_loss: 0.4928 - val_acc: 0.9252

Epoch 00011: val_acc improved from 0.92051 to 0.92521, saving model to weights.final.from_scratch.hdf5
Epoch 12/50
31497/31497 [==============================] - 9s 297us/step - loss: 0.1709 - acc: 0.9547 - val_loss: 0.3504 - val_acc: 0.9279

Epoch 00012: val_acc improved from 0.92521 to 0.92787, saving model to weights.final.from_scratch.hdf5
Epoch 13/50
31497/31497 [==============================] - 9s 296us/step - loss: 0.1669 - acc: 0.9587 - val_loss: 0.4601 - val_acc: 0.9029

Epoch 00013: val_acc did not improve from 0.92787
Epoch 14/50
31497/31497 [==============================] - 9s 296us/step - loss: 0.1766 - acc: 0.9590 - val_loss: 0.3873 - val_acc: 0.9246

Epoch 00014: val_acc did not improve from 0.92787
Epoch 15/50
31497/31497 [==============================] - 10s 308us/step - loss: 0.1755 - acc: 0.9583 - val_loss: 0.3911 - val_acc: 0.9168

Epoch 00015: val_acc did not improve from 0.92787
Epoch 16/50
31497/31497 [==============================] - 9s 297us/step - loss: 0.1799 - acc: 0.9595 - val_loss: 0.5933 - val_acc: 0.8947

Epoch 00016: val_acc did not improve from 0.92787
Epoch 17/50
31497/31497 [==============================] - 10s 302us/step - loss: 0.1906 - acc: 0.9582 - val_loss: 0.4779 - val_acc: 0.9164

Epoch 00017: val_acc did not improve from 0.92787
Epoch 18/50
31497/31497 [==============================] - 10s 312us/step - loss: 0.1878 - acc: 0.9595 - val_loss: 0.3752 - val_acc: 0.9267

Epoch 00018: val_acc did not improve from 0.92787
Epoch 19/50
31497/31497 [==============================] - 9s 296us/step - loss: 0.2078 - acc: 0.9603 - val_loss: 0.4575 - val_acc: 0.9262

Epoch 00019: val_acc did not improve from 0.92787
Epoch 20/50
31497/31497 [==============================] - 9s 294us/step - loss: 0.1955 - acc: 0.9615 - val_loss: 0.4487 - val_acc: 0.9341

Epoch 00020: val_acc improved from 0.92787 to 0.93410, saving model to weights.final.from_scratch.hdf5
Epoch 21/50
31497/31497 [==============================] - 9s 295us/step - loss: 0.2073 - acc: 0.9626 - val_loss: 0.8919 - val_acc: 0.8668

Epoch 00021: val_acc did not improve from 0.93410
Epoch 22/50
31497/31497 [==============================] - 9s 294us/step - loss: 0.2105 - acc: 0.9632 - val_loss: 0.5252 - val_acc: 0.9251

Epoch 00022: val_acc did not improve from 0.93410
Epoch 23/50
31497/31497 [==============================] - 9s 293us/step - loss: 0.2069 - acc: 0.9636 - val_loss: 0.4190 - val_acc: 0.9393

Epoch 00023: val_acc improved from 0.93410 to 0.93930, saving model to weights.final.from_scratch.hdf5
Epoch 24/50
31497/31497 [==============================] - 11s 340us/step - loss: 0.2050 - acc: 0.9650 - val_loss: 0.5641 - val_acc: 0.9217

Epoch 00024: val_acc did not improve from 0.93930
Epoch 25/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.2208 - acc: 0.9632 - val_loss: 0.6424 - val_acc: 0.9267

Epoch 00025: val_acc did not improve from 0.93930
Epoch 26/50
31497/31497 [==============================] - 10s 322us/step - loss: 0.2192 - acc: 0.9660 - val_loss: 0.9008 - val_acc: 0.9073

Epoch 00026: val_acc did not improve from 0.93930
Epoch 27/50
31497/31497 [==============================] - 10s 303us/step - loss: 0.2271 - acc: 0.9669 - val_loss: 0.6192 - val_acc: 0.9187

Epoch 00027: val_acc did not improve from 0.93930
Epoch 28/50
31497/31497 [==============================] - 9s 301us/step - loss: 0.2691 - acc: 0.9661 - val_loss: 0.6392 - val_acc: 0.9131

Epoch 00028: val_acc did not improve from 0.93930
Epoch 29/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.2606 - acc: 0.9667 - val_loss: 0.8486 - val_acc: 0.9158

Epoch 00029: val_acc did not improve from 0.93930
Epoch 30/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.2589 - acc: 0.9677 - val_loss: 1.4309 - val_acc: 0.8787

Epoch 00030: val_acc did not improve from 0.93930
Epoch 31/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.2671 - acc: 0.9669 - val_loss: 1.0498 - val_acc: 0.9053

Epoch 00031: val_acc did not improve from 0.93930
Epoch 32/50
31497/31497 [==============================] - 10s 307us/step - loss: 0.3043 - acc: 0.9673 - val_loss: 0.9857 - val_acc: 0.9103

Epoch 00032: val_acc did not improve from 0.93930
Epoch 33/50
31497/31497 [==============================] - 9s 301us/step - loss: 0.2982 - acc: 0.9679 - val_loss: 0.8643 - val_acc: 0.9191

Epoch 00033: val_acc did not improve from 0.93930
Epoch 34/50
31497/31497 [==============================] - 10s 311us/step - loss: 0.3129 - acc: 0.9686 - val_loss: 1.2602 - val_acc: 0.8940

Epoch 00034: val_acc did not improve from 0.93930
Epoch 35/50
31497/31497 [==============================] - 10s 314us/step - loss: 0.3142 - acc: 0.9704 - val_loss: 1.1258 - val_acc: 0.9125

Epoch 00035: val_acc did not improve from 0.93930
Epoch 36/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.4029 - acc: 0.9664 - val_loss: 1.1023 - val_acc: 0.9148

Epoch 00036: val_acc did not improve from 0.93930
Epoch 37/50
31497/31497 [==============================] - 9s 298us/step - loss: 0.4537 - acc: 0.9631 - val_loss: 1.2348 - val_acc: 0.9058

Epoch 00037: val_acc did not improve from 0.93930
Epoch 38/50
31497/31497 [==============================] - 10s 303us/step - loss: 0.3772 - acc: 0.9684 - val_loss: 1.2512 - val_acc: 0.9098

Epoch 00038: val_acc did not improve from 0.93930
Epoch 39/50
31497/31497 [==============================] - 10s 332us/step - loss: 0.4227 - acc: 0.9656 - val_loss: 1.2877 - val_acc: 0.9069

Epoch 00039: val_acc did not improve from 0.93930
Epoch 40/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.4919 - acc: 0.9629 - val_loss: 0.9896 - val_acc: 0.9313

Epoch 00040: val_acc did not improve from 0.93930
Epoch 41/50
31497/31497 [==============================] - 10s 302us/step - loss: 0.4702 - acc: 0.9646 - val_loss: 1.9591 - val_acc: 0.8687

Epoch 00041: val_acc did not improve from 0.93930
Epoch 42/50
31497/31497 [==============================] - 10s 306us/step - loss: 0.6185 - acc: 0.9565 - val_loss: 1.3275 - val_acc: 0.9116

Epoch 00042: val_acc did not improve from 0.93930
Epoch 43/50
31497/31497 [==============================] - 10s 322us/step - loss: 0.7129 - acc: 0.9510 - val_loss: 1.5307 - val_acc: 0.8971

Epoch 00043: val_acc did not improve from 0.93930
Epoch 44/50
31497/31497 [==============================] - 9s 300us/step - loss: 0.8166 - acc: 0.9446 - val_loss: 1.2090 - val_acc: 0.9178

Epoch 00044: val_acc did not improve from 0.93930
Epoch 45/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.6667 - acc: 0.9537 - val_loss: 1.5072 - val_acc: 0.8997

Epoch 00045: val_acc did not improve from 0.93930
Epoch 46/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.6992 - acc: 0.9522 - val_loss: 1.3477 - val_acc: 0.9112

Epoch 00046: val_acc did not improve from 0.93930
Epoch 47/50
31497/31497 [==============================] - 10s 316us/step - loss: 0.7983 - acc: 0.9466 - val_loss: 1.3255 - val_acc: 0.9138

Epoch 00047: val_acc did not improve from 0.93930
Epoch 48/50
31497/31497 [==============================] - 9s 299us/step - loss: 0.8925 - acc: 0.9403 - val_loss: 2.1292 - val_acc: 0.8625

Epoch 00048: val_acc did not improve from 0.93930
Epoch 49/50
31497/31497 [==============================] - 9s 300us/step - loss: 0.8720 - acc: 0.9425 - val_loss: 1.6247 - val_acc: 0.8951

Epoch 00049: val_acc did not improve from 0.93930
Epoch 50/50
31497/31497 [==============================] - 9s 300us/step - loss: 0.9601 - acc: 0.9369 - val_loss: 1.6949 - val_acc: 0.8895

Epoch 00050: val_acc did not improve from 0.93930
Out[0]:
<keras.callbacks.History at 0x7f61a31f37b8>
In [0]:
# Restore the checkpoint with the best validation accuracy before evaluating.
model.load_weights('weights.final.from_scratch.hdf5')

# Predict the whole test set in one batched call instead of one
# model.predict invocation per image (the original looped over every
# tensor individually, which is dramatically slower).
plant_predictions = np.argmax(model.predict(test_tensors), axis=1)

# report test accuracy
test_accuracy = 100 * np.sum(plant_predictions == np.argmax(target_test, axis=1)) / len(plant_predictions)
print('Test accuracy: %.4f%%' % test_accuracy)
Test accuracy: 94.0364%
In [0]:
import cv2
class_names = list(class_names)

def prediction_scratch(path):
    """Display the image at ``path`` and print the model's predicted class.

    Parameters
    ----------
    path : str
        Path to a test image file.
    """
    tensor = path_to_tensor(path)
    img_bgr = cv2.imread(path)
    # cv2.imread returns BGR; reverse the channel axis so matplotlib
    # (which expects RGB) shows the true colors.  The original displayed
    # the raw BGR array, swapping red and blue.
    plt.imshow(img_bgr[:, :, ::-1])
    plt.show()
    prediction = class_names[np.argmax(model.predict(tensor))]
    print("Predicted class of image is :", prediction)
  
In [0]:
prediction_scratch(test[5000])
Predicted class of image is : Potato___healthy
In [0]:
prediction_scratch(test[100])
Predicted class of image is : Corn_(maize)___Common_rust_
In [0]:
prediction_scratch(test[3600])
Predicted class of image is : Grape___Black_rot